# Fit density (also referred to as CPUE) model with environmental predictors and use that to calculate weighted mean dissolved oxygen, temperature and depth of Baltic cod
# Load libraries, install if needed
library(tidyverse); theme_set(theme_light(base_size = 10))
library(readxl)
library(tidylog)
library(RCurl)
library(viridis)
library(RColorBrewer)
library(patchwork)
library(janitor)
library(icesDatras)
library(mapdata)
library(patchwork)
library(rgdal)
library(raster)
library(sf)
library(rgeos)
library(chron)
library(lattice)
library(ncdf4)
library(marmap)
library(rnaturalearth)
library(rnaturalearthdata)
library(mapplots)
library(qwraps2)
#remotes::install_github("pbs-assess/sdmTMB")
library(sdmTMB)# To load entire cache in interactive r session, do: qwraps2::lazyload_cache_dir(path = "R/analysis/spatial_trend_models_cache/html")
# Specify map ranges (lon/lat bounding box for the southern Baltic)
ymin = 54; ymax = 58; xmin = 12; xmax = 22
# Medium-resolution European country polygons as an sf object
map_data <- rnaturalearth::ne_countries(
scale = "medium",
returnclass = "sf", continent = "europe")
# Crop the polygon for plotting and efficiency:
# st_bbox(map_data) # find the rough coordinates
swe_coast <- suppressWarnings(suppressMessages(
st_crop(map_data,
c(xmin = xmin, ymin = ymin, xmax = xmax, ymax = ymax))))
# Transform our map into UTM 33 coordinates, which is the equal-area projection we fit in:
# NOTE(review): EPSG 32633 is UTM zone 33N, which is conformal rather than
# equal-area — confirm the intended projection description.
utm_zone33 <- 32633
swe_coast_proj <- sf::st_transform(swe_coast, crs = utm_zone33)
# Quick visual check of the projected coastline
ggplot(swe_coast_proj) + geom_sf()
# Define plotting theme for main plot.
#
# BUG FIX: base_size and base_family were accepted but ignored — theme_light()
# was called with hard-coded 10 and "". They are now passed through so calls
# like theme_plot(base_size = 14) take effect. The explicit element sizes set
# in theme() below still override the base size for those specific elements.
theme_plot <- function(base_size = 10, base_family = "") {
  theme_light(base_size = base_size, base_family = base_family) +
    theme(
      axis.text.x = element_text(angle = 90),
      axis.text = element_text(size = 8),
      legend.text = element_text(size = 8),
      legend.title = element_text(size = 8),
      legend.position = "bottom",
      legend.key.height = unit(0.2, "cm"),
      legend.margin = margin(0, 0, 0, 0),
      legend.box.margin = margin(-5, -5, -5, -5),
      strip.text = element_text(size = 8, colour = 'black', margin = margin()),
      strip.background = element_rect(fill = "grey90")
    )
}
# Define plotting theme for facet_wrap map with years.
#
# BUG FIX: as with theme_plot(), base_size and base_family were hard-coded
# inside the body, so the base_size = 14 used for plot_map_raster was silently
# ignored. Arguments are now forwarded to theme_light().
theme_facet_map <- function(base_size = 10, base_family = "") {
  theme_light(base_size = base_size, base_family = base_family) +
    theme(
      axis.text.x = element_text(angle = 90),
      axis.text = element_text(size = 6),
      strip.text = element_text(size = 8, colour = 'black', margin = margin()),
      strip.background = element_rect(fill = "grey90")
    )
}
# Make default base map plot: projected coastline that raster/point layers
# are added onto throughout the script
plot_map_raster <-
ggplot(swe_coast_proj) +
geom_sf(size = 0.3) +
labs(x = "Longitude", y = "Latitude") +
theme_facet_map(base_size = 14)
# Convert vectors of longitude (x) and latitude (y) in WGS84 to UTM
# coordinates in the given zone. Returns a data frame with columns ID, X, Y
# (X/Y in metres), matching the original interface.
#
# BUG FIX: the original proj string was "+proj=utm +zone=NN ellps=WGS84" —
# the missing "+" before "ellps" meant the ellipsoid token was silently
# dropped (hence the "Discarded datum" warning seen when calling this).
# Use an explicit +datum=WGS84 instead.
LongLatToUTM <- function(x, y, zone){
  xy <- data.frame(ID = seq_along(x), X = x, Y = y)
  coordinates(xy) <- c("X", "Y")
  proj4string(xy) <- CRS("+proj=longlat +datum=WGS84")
  res <- spTransform(xy, CRS(paste0("+proj=utm +zone=", zone, " +datum=WGS84")))
  as.data.frame(res)
}
# Read haul-level CPUE data (cod and flounder densities plus covariates)
d <- readr::read_csv("https://raw.githubusercontent.com/maxlindmark/cod_condition/master/data/for_analysis/mdat_cpue.csv")
# Calculate standardized variables: copy the raw covariates, then scale the
# copies to mean 0 / sd 1 (the raw columns are kept for back-transforming the
# prediction grid below).
# Uses across() instead of the superseded mutate_at(); also removes a stray
# trailing comma in the first mutate().
d <- d %>%
  mutate(oxy_sc = oxy,
         temp_sc = temp,
         depth_sc = depth) %>%
  mutate(across(c(oxy_sc, temp_sc, depth_sc),
                ~ as.vector(scale(.x)))) %>%
  mutate(year = as.integer(year)) %>%
  drop_na(oxy, depth, temp) %>%
  rename("density_cod" = "density") # to fit better with how flounder is named
# And now read in pred_grid2 which has oxygen values at location and time and depth:
pred_grid2 <- readr::read_csv("https://raw.githubusercontent.com/maxlindmark/cod_condition/master/data/for_analysis/pred_grid2.csv")
# Standardize data with respect to prediction grid:
# keep only years present in the haul data, and scale grid covariates using
# the *data's* mean/sd so model terms line up with the fitted scale
pred_grid2 <- pred_grid2 %>%
mutate(year = as.integer(year)) %>%
filter(year %in% c(unique(d$year))) %>%
mutate(depth_sc = (depth - mean(d$depth))/sd(d$depth),
temp_sc = (temp - mean(d$temp))/sd(d$temp),
oxy_sc = (oxy - mean(d$oxy))/sd(d$oxy)) %>% # Need to scale these to the mean and sd in the data!
drop_na(oxy, depth, temp)
# Add ices_rect (ICES statistical rectangle code from lon/lat; mapplots)
pred_grid2$ices_rect <- ices.rect2(pred_grid2$lon, pred_grid2$lat)
# Build the SPDE mesh (100 kmeans-placed knots, fixed seed for
# reproducibility) used by the density models below
spde <- make_mesh(d, xy_cols = c("X", "Y"),
n_knots = 100,
type = "kmeans", seed = 42)
# Plot and save spde
png(file = "figures/supp/density_spde.png", units = "in", width = 6.5, height = 6.5, res = 300)
plot(spde)
dev.off()
# mcod1 <- sdmTMB(density_cod ~ 1 + s(depth_sc) + s(oxy_sc) + s(temp_sc),
# data = d, spde = spde, family = tweedie(link = "log"),
# fields = "AR1", include_spatial = TRUE, time = "year",
# spatial_only = FALSE, spatial_trend = TRUE, reml = FALSE,
# control = sdmTMBcontrol(newton_steps = 1))
#
# tidy(mcod1, conf.int = TRUE)
#
# d$residualsmcod1 <- residuals(mcod1)
# qqnorm(d$residualsmcod1); abline(a = 0, b = 1)
#
# # Check AR1 param
# mcodsd1 <- as.data.frame(summary(TMB::sdreport(mcod1$tmb_obj)))
# mcodsd1$Estimate[row.names(mcodsd1) == "ar1_phi"]
# mcodsd1$Estimate[row.names(mcodsd1) == "ar1_phi"] +
# c(-2, 2) * mcodsd1$`Std. Error`[row.names(mcodsd1) == "ar1_phi"]
# Cod density model: Tweedie GLMM with a depth smoother, spatial field,
# AR1 spatiotemporal fields and a spatial trend (zeta_s). Oxygen/temperature
# smoothers were dropped relative to the commented-out mcod1 above.
mcod2 <- sdmTMB(density_cod ~ 1 + s(depth_sc),
data = d, spde = spde, family = tweedie(link = "log"),
fields = "AR1", include_spatial = TRUE, time = "year",
spatial_only = FALSE, spatial_trend = TRUE, reml = FALSE,
control = sdmTMBcontrol(newton_steps = 1))
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
# Coefficient table with confidence intervals
tidy(mcod2, conf.int = TRUE)
# Residual QQ check (randomized quantile residuals vs the 1:1 line)
d$residualsmcod2 <- residuals(mcod2)
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
qqnorm(d$residualsmcod2); abline(a = 0, b = 1)
# mcod3 <- sdmTMB(density_cod ~ 1,
# data = d, spde = spde, family = tweedie(link = "log"),
# fields = "AR1", include_spatial = TRUE, time = "year",
# spatial_only = FALSE, spatial_trend = TRUE, reml = FALSE,
# control = sdmTMBcontrol(newton_steps = 1))
#
# tidy(mcod3, conf.int = TRUE)
#
# d$residualsmcod3 <- residuals(mcod3)
# qqnorm(d$residualsmcod3); abline(a = 0, b = 1)
# mfle1 <- sdmTMB(density_fle ~ 1 + s(depth_sc) + s(oxy_sc) + s(temp_sc),
# data = d, spde = spde, family = tweedie(link = "log"),
# fields = "AR1", include_spatial = TRUE, time = "year",
# spatial_only = FALSE, spatial_trend = TRUE, reml = FALSE,
# control = sdmTMBcontrol(newton_steps = 1))
#
# tidy(mfle1, conf.int = TRUE)
#
# d$residualsmfle1 <- residuals(mfle1)
# qqnorm(d$residualsmfle1); abline(a = 0, b = 1)
#
# # Check AR1 param
# mflesd1 <- as.data.frame(summary(TMB::sdreport(mfle1$tmb_obj)))
# mflesd1$Estimate[row.names(mflesd1) == "ar1_phi"]
# mflesd1$Estimate[row.names(mflesd1) == "ar1_phi"] +
# c(-2, 2) * mflesd1$`Std. Error`[row.names(mflesd1) == "ar1_phi"]
# Flounder density model: same structure as the cod model mcod2
# (Tweedie, depth smoother, spatial trend, AR1 spatiotemporal fields)
mfle2 <- sdmTMB(density_fle ~ 1 + s(depth_sc),
data = d, spde = spde, family = tweedie(link = "log"),
fields = "AR1", include_spatial = TRUE, time = "year",
spatial_only = FALSE, spatial_trend = TRUE, reml = FALSE,
control = sdmTMBcontrol(newton_steps = 1))
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
tidy(mfle2, conf.int = TRUE)
# Residual QQ check, as for the cod model
d$residualsmfle2 <- residuals(mfle2)
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
qqnorm(d$residualsmfle2); abline(a = 0, b = 1)
# mfle3 <- sdmTMB(density_fle ~ 1,
# data = d, spde = spde, family = tweedie(link = "log"),
# fields = "AR1", include_spatial = TRUE, time = "year",
# spatial_only = FALSE, spatial_trend = TRUE, reml = FALSE,
# control = sdmTMBcontrol(newton_steps = 1))
#
# tidy(mfle3, conf.int = TRUE)
#
# d$residualsmfle3 <- residuals(mfle3)
# qqnorm(d$residualsmfle3); abline(a = 0, b = 1)
# Read individual-level condition data (weight and length per fish)
dc <- readr::read_csv("https://raw.githubusercontent.com/maxlindmark/cod_condition/master/data/for_analysis/mdat_cond.csv")
#>
#> ── Column specification ────────────────────────────────────────────────────────
#> cols(
#>   .default = col_double(),
#>   ices_rect = col_character(),
#>   IDr = col_character()
#> )
#> ℹ Use `spec()` for the full column specifications.
# Calculate standardized variables: log weight/length for the allometric
# condition model, and a mean-centered depth covariate.
# NOTE(review): depth_sc is just a copy of depth here (not scaled, unlike in
# the density data), and the centered version is named depth_sc_sc — confirm
# this asymmetric naming is intended.
dc <- dc %>%
mutate(year = as.integer(year),
depth_sc = depth,
ln_weight_g = log(weight_g),
ln_length_cm = log(length_cm)) %>%
mutate(depth_sc_sc = depth_sc - mean(depth_sc))
#> mutate: converted 'year' from double to integer (0 new NA)
#>         changed 94,533 values (100%) of 'depth_sc' (0 new NA)
#>         new variable 'ln_weight_g' (double) with 3,744 unique values and 0% NA
#>         new variable 'ln_length_cm' (double) with 110 unique values and 0% NA
#> mutate: new variable 'depth_sc_sc' (double) with 125 unique values and 0% NA
# New mesh for the condition data (200 knots; more data than the CPUE set)
spde <- make_mesh(dc, xy_cols = c("X", "Y"),
n_knots = 200,
type = "kmeans", seed = 42)
# Plot and save spde
png(file = "figures/supp/condition_spde.png", units = "in", width = 6.5, height = 6.5, res = 300)
plot(spde)
dev.off()
#> quartz_off_screen
#>                 2
# Fit condition model: log weight as a function of log length (allometric
# slope) plus a depth smoother, with Student-t errors (df = 5) for robustness.
# FIX: the student(...) family was passed positionally among otherwise named
# arguments; name it explicitly as family = so it cannot bind to the wrong
# parameter of sdmTMB().
mcond <- sdmTMB(ln_weight_g ~ 1 + ln_length_cm + s(depth_sc),
data = dc, spde = spde, family = student(link = "identity", df = 5),
fields = "AR1", include_spatial = TRUE, time = "year",
spatial_only = FALSE, spatial_trend = TRUE, reml = FALSE,
control = sdmTMBcontrol(newton_steps = 1))
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
# Residual QQ check, saved as a supplementary figure
dc$residuals_mcond <- residuals(mcond)
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
png(file = "figures/supp/condition_qq.png", units = "in", width = 6.5, height = 6.5, res = 300)
qqnorm(dc$residuals_mcond); abline(a = 0, b = 1)
dev.off()
#> quartz_off_screen
#>                 2
# Predict cod density on the grid (est is on the log link scale)
predcod <- predict(mcod2, newdata = pred_grid2)
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
# Predict flounder density on the same grid
predfle <- predict(mfle2, newdata = pred_grid2)
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
# Condition
# ln_length_cm = 0 corresponds to length = 1 cm, so the prediction isolates
# the intercept (condition) component rather than length-driven weight
predcondition <- predict(mcond, newdata = mutate(pred_grid2, ln_length_cm = 0))
#> mutate: new variable 'ln_length_cm' (double) with one unique value and 0% NA
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
# Maps of the spatial trend (zeta_s) from each model; a single year (1999) is
# filtered out to get one row per grid cell.
# Cod
p1 <- plot_map_raster +
  geom_raster(data = filter(predcod, year == 1999), aes(x = X * 1000, y = Y * 1000, fill = zeta_s)) +
  scale_fill_gradient2() +
  ggtitle("Cod spatial trend") +
  theme_plot()
#> filter: removed 241,306 rows (96%), 9,281 rows remaining
# BUG FIX: ggsave() without `plot =` saves the last *displayed* plot; p1 was
# assigned but never printed, so the wrong figure could be written to disk.
# Pass the plot objects explicitly.
ggsave("figures/zeta_s_cod_map.png", plot = p1, width = 6.5, height = 6.5, dpi = 600)
# Flounder
p2 <- plot_map_raster +
  geom_raster(data = filter(predfle, year == 1999), aes(x = X * 1000, y = Y * 1000, fill = zeta_s)) +
  scale_fill_gradient2() +
  ggtitle("Flounder spatial trend") +
  theme_plot()
#> filter: removed 241,306 rows (96%), 9,281 rows remaining
ggsave("figures/zeta_s_fle_map.png", plot = p2, width = 6.5, height = 6.5, dpi = 600)
# Cod condition
p3 <- plot_map_raster +
  geom_raster(data = filter(predcondition, year == 1999), aes(x = X * 1000, y = Y * 1000, fill = zeta_s)) +
  scale_fill_gradient2() +
  ggtitle("Cod condition spatial trend") +
  theme_plot()
#> filter: removed 241,306 rows (96%), 9,281 rows remaining
# Combine the three panels (patchwork); the spacer fills the empty slot
(p1 | p2) / (p3 + (plot_spacer()))
# Combine cod and flounder predictions into one data frame: prefix the cod
# columns, then bolt on the flounder columns (same grid, same row order)
all_pred <- predcod %>%
rename("cod_est" = "est",
"cod_est_non_rf" = "est_non_rf",
"cod_est_rf" = "est_rf",
"cod_omega_s" = "omega_s",
"cod_zeta_s" = "zeta_s",
"cod_epsilon_st" = "epsilon_st") %>%
mutate(fle_est = predfle$est,
fle_est_non_rf = predfle$est_non_rf,
fle_est_rf = predfle$est_rf,
fle_omega_s = predfle$omega_s,
fle_zeta_s = predfle$zeta_s,
fle_epsilon_st = predfle$epsilon_st)
#> rename: renamed 6 variables (cod_est, cod_est_non_rf, cod_est_rf, cod_omega_s, cod_zeta_s, …)
#> mutate: new variable 'fle_est' (double) with 248,975 unique values and 0% NA
#>         new variable 'fle_est_non_rf' (double) with 6,279 unique values and 0% NA
#>         new variable 'fle_est_rf' (double) with 248,914 unique values and 0% NA
#>         new variable 'fle_omega_s' (double) with 9,220 unique values and 0% NA
#>         new variable 'fle_zeta_s' (double) with 9,220 unique values and 0% NA
#>         new variable 'fle_epsilon_st' (double) with 248,914 unique values and 0% NA
# Add ices_rectangle for grouped summaries further down
all_pred$ices_rect <- ices.rect2(all_pred$lon, all_pred$lat)
# Scatter of flounder vs cod spatial trends per grid cell (1:1 line in red)
ggplot(filter(all_pred, year == 1999), aes(fle_zeta_s, cod_zeta_s)) +
geom_point() +
geom_abline(color = "red")
#> filter: removed 241,306 rows (96%), 9,281 rows remaining
# Cod: spatial trend map restricted to cells above the 25th percentile of
# predicted density, to focus on where the species actually occurs
plot_map_raster +
geom_raster(data = filter(predcod, year == 1999 & est > quantile(est, prob = 0.25)), aes(x = X * 1000, y = Y * 1000, fill = zeta_s)) +
scale_fill_gradient2() +
ggtitle("Spatial trend effects") +
theme_plot()
#> filter: removed 243,743 rows (97%), 6,844 rows remaining
#> Warning: Raster pixels are placed at uneven vertical intervals and will be
#> shifted. Consider using geom_tile() instead.
# Flounder: same density-filtered trend map
plot_map_raster +
geom_raster(data = filter(predfle, year == 1999 & est > quantile(est, prob = 0.25)), aes(x = X * 1000, y = Y * 1000, fill = zeta_s)) +
scale_fill_gradient2() +
ggtitle("Spatial trend effects") +
theme_plot()
#> filter: removed 244,347 rows (98%), 6,240 rows remaining
# Trend-vs-trend scatter, now on the density-filtered cells only
all_pred %>%
filter(cod_est > quantile(cod_est, prob = 0.25),
fle_est > quantile(fle_est, prob = 0.25)) %>%
ggplot(., aes(fle_zeta_s, cod_zeta_s)) +
geom_point() +
geom_abline(color = "red")
#> filter: removed 93,130 rows (37%), 157,457 rows remaining
# All data: raw flounder vs cod density per haul
ggplot(d, aes(density_fle, density_cod)) +
geom_point(alpha = 0.1)
# The above plot however, contains rectangles where one species is not abundant,
# but the other could. Hence, this negative co-occurence
# Average densities across rectangles
# (sums the haul densities per ICES rectangle across all years)
d_sum <- d %>%
group_by(ices_rect) %>%
summarise(rec_density_cod = sum(density_cod),
rec_density_fle = sum(density_fle)) %>%
ungroup()
#> group_by: one grouping variable (ices_rect)
#> summarise: now 50 rows and 3 columns, ungrouped
#> ungroup: no grouping variables
# Add in the mid-coordinate of each ices rectangle
d_sum$lon <- ices.rect(d_sum$ices_rect)[, 1]
d_sum$lat <- ices.rect(d_sum$ices_rect)[, 2]
# ... And add in the UTM coords base on the lon-lat (converted to km)
utm_coords <- LongLatToUTM(d_sum$lon, d_sum$lat, zone = 33)
#> Warning in showSRID(uprojargs, format = "PROJ", multiline = "NO", prefer_proj =
#> prefer_proj): Discarded datum Unknown based on WGS84 ellipsoid in CRS definition
d_sum$X <- utm_coords$X/1000
d_sum$Y <- utm_coords$Y/1000
# Cod: one square per ICES rectangle, colored by log summed density
p1 <- plot_map_raster +
geom_point(data = d_sum, aes(x = X * 1000, y = Y * 1000, color = log(rec_density_cod)),
size = 10, shape = 15) +
scale_color_viridis() +
theme_plot()
# Flounder: same map for flounder
p2 <- plot_map_raster +
geom_point(data = d_sum, aes(x = X * 1000, y = Y * 1000, color = log(rec_density_fle)),
size = 10, shape = 15) +
scale_color_viridis() +
theme_plot()
p1/p2
# Now filter rectangles above a certain density threshold.
# NOTE(review): the variables are named percentile_10_* but quantile() is
# called with prob = 0.25, i.e. the 25th percentile — confirm which cutoff
# is intended and rename accordingly.
d_filt <- d_sum %>%
mutate(percentile_10_cod = quantile(rec_density_cod, prob = 0.25),
percentile_10_fle = quantile(rec_density_fle, prob = 0.25)) %>%
mutate(rec_keep_cod = ifelse(rec_density_cod > percentile_10_cod, "Y", "N"),
rec_keep_fle = ifelse(rec_density_fle > percentile_10_fle, "Y", "N")) %>%
filter(rec_keep_cod == "Y" & rec_keep_fle == "Y")
#> mutate: new variable 'percentile_10_cod' (double) with one unique value and 0% NA
#>         new variable 'percentile_10_fle' (double) with one unique value and 0% NA
#> mutate: new variable 'rec_keep_cod' (character) with 2 unique values and 0% NA
#>         new variable 'rec_keep_fle' (character) with 2 unique values and 0% NA
#> filter: removed 20 rows (40%), 30 rows remaining
# Plot again, now for the threshold-filtered rectangles only
# Cod
p3 <- plot_map_raster +
geom_point(data = d_filt, aes(x = X * 1000, y = Y * 1000, color = log(rec_density_cod)),
size = 10, shape = 15) +
scale_color_viridis() +
theme_plot()
# Flounder
p4 <- plot_map_raster +
geom_point(data = d_filt, aes(x = X * 1000, y = Y * 1000, color = log(rec_density_fle)),
size = 10, shape = 15) +
scale_color_viridis() +
theme_plot()
# Unfiltered (top row) vs filtered (bottom row) rectangle maps
(p1 | p2) / (p3 | p4)
# Now plot the same relationship between flounder and cod density,
# faceted per retained rectangle
d %>%
filter(ices_rect %in% d_filt$ices_rect) %>%
ggplot(., aes(density_fle, density_cod)) +
geom_point(alpha = 0.6) +
facet_wrap(~ices_rect, scales = "free") +
theme_facet_map() +
coord_cartesian(expand = 0.02)
#> filter: removed 685 rows (20%), 2,762 rows remaining
# Pooled version with a smoother, axes clipped to cut extreme hauls
p5 <- d %>%
filter(ices_rect %in% d_filt$ices_rect) %>%
ggplot(., aes(density_fle, density_cod)) +
geom_point(alpha = 0.1) +
theme_facet_map() +
stat_smooth() +
coord_cartesian(ylim = c(0, 4000), xlim = c(0, 2000))
#> filter: removed 685 rows (20%), 2,762 rows remaining
# Same, but now use the predicted estimates at the observation locations
# (est is on the log link scale, hence the exp() below)
d$cod_pred <- predict(mcod2, newdata = d)$est
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
d$fle_pred <- predict(mfle2, newdata = d)$est
#> Warning: Detected a `s()` smoother. Smoothers are penalized in sdmTMB as
#> of version 0.0.19, but used to be unpenalized.
#> You no longer need to specify `k`, specifing `k` now indicates
#> an upper limit on the basis functions, and the degree of wiggliness
#> is determined by the data.
p6 <- d %>%
filter(ices_rect %in% d_filt$ices_rect) %>%
ggplot(., aes(exp(fle_pred), exp(cod_pred))) +
geom_point(alpha = 0.1) +
theme_facet_map() +
stat_smooth() +
coord_cartesian(ylim = c(0, 4000), xlim = c(0, 2000))
#> filter: removed 685 rows (20%), 2,762 rows remaining
# Observed (p5) vs model-predicted (p6) density relationship
p5/p6
#> `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
#> `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
# Pred vs fitted: observed density against back-transformed prediction,
# with the 1:1 line in red (cod, then flounder)
d %>%
filter(ices_rect %in% d_filt$ices_rect) %>%
ggplot(., aes(density_cod, exp(cod_pred))) +
geom_point(alpha = 0.1) +
geom_abline(color = "red") +
theme_facet_map()
#> filter: removed 685 rows (20%), 2,762 rows remaining
d %>%
filter(ices_rect %in% d_filt$ices_rect) %>%
ggplot(., aes(density_fle, exp(fle_pred))) +
geom_point(alpha = 0.1) +
geom_abline(color = "red") +
theme_facet_map()
#> filter: removed 685 rows (20%), 2,762 rows remaining
# OK, now summarise this a bit. Let's look at presence absence.
pres_abs <- d %>%
filter(ices_rect %in% d_filt$ices_rect) %>%
mutate(cod_pa = ifelse(density_cod > 0, "Y", "N"),
fle_pa = ifelse(density_fle > 0, "Y", "N"))
#> filter: removed 685 rows (20%), 2,762 rows remaining
#> mutate: new variable 'cod_pa' (character) with 2 unique values and 0% NA
#>         new variable 'fle_pa' (character) with 2 unique values and 0% NA
# Jittered presence/absence cross-plot (alternative facet version kept
# commented out below)
pres_abs %>%
#filter(cod_pa == "Y" & fle_pa == "Y") %>%
ggplot(., aes(fle_pa, cod_pa)) +
geom_jitter(alpha = 0.6) +
# facet_wrap(~ices_rect, scales = "free") +
# theme_facet_map() +
# coord_cartesian(expand = 0.02)
NULL
# Calculate the ratio of flounder to cod
all_pred <- all_pred %>%
mutate(fle_cod_prop = exp(all_pred$fle_est) / (exp(all_pred$cod_est) + exp(all_pred$fle_est)))
#> mutate: new variable 'fle_cod_prop' (double) with 248,975 unique values and 0% NA
# Calculate mean proportion to get the baseline
mean_prop <- mean(all_pred$fle_cod_prop)
# Plot ratio on map
plot_map_raster +
geom_raster(data = all_pred, aes(x = X * 1000, y = Y * 1000, fill = fle_cod_prop)) +
scale_fill_gradient2(midpoint = mean_prop) +
ggtitle("Proportion flounder") +
facet_wrap(~ year) +
theme_facet_map()
# Plot delta_difference in proportion in a single map:
# build per-cell proportions for the first (1993) and last (2019) year,
# join them on a cell id built from the X/Y coordinates, and difference them
all_pred_93_grid <- all_pred %>%
filter(year == 1993) %>%
dplyr::select(X, Y, fle_cod_prop) %>%
rename("fle_cod_prop_93" = "fle_cod_prop") %>%
mutate(id = paste(X, Y, sep = "."))
#> filter: removed 241,306 rows (96%), 9,281 rows remaining
#> rename: renamed one variable (fle_cod_prop_93)
#> mutate: new variable 'id' (character) with 9,281 unique values and 0% NA
all_pred_19_grid <- all_pred %>%
filter(year == 2019) %>%
dplyr::select(X, Y, fle_cod_prop) %>%
rename("fle_cod_prop_19" = "fle_cod_prop") %>%
mutate(id = paste(X, Y, sep = ".")) %>%
dplyr::select(-X, -Y) # drop X/Y so the join adds only the 2019 proportion
#> filter: removed 241,306 rows (96%), 9,281 rows remaining
#> rename: renamed one variable (fle_cod_prop_19)
#> mutate: new variable 'id' (character) with 9,281 unique values and 0% NA
all_pred_delta_grid <- left_join(all_pred_93_grid, all_pred_19_grid, by = "id") %>%
mutate(delta_prop = fle_cod_prop_19 - fle_cod_prop_93)
#> left_join: added one column (fle_cod_prop_19)
#>            > rows only in x    0
#>            > rows only in y  ( 0)
#>            > matched rows     9,281
#>            >                 =======
#>            > rows total       9,281
#> mutate: new variable 'delta_prop' (double) with 9,220 unique values and 0% NA
# Map of the 2019 - 1993 change in flounder proportion.
# FIX: paste(expression(delta), ...) just produced the literal text
# "delta proportion flounder"; use a plotmath expression so the Greek
# letter actually renders in the title.
plot_map_raster +
  geom_raster(data = all_pred_delta_grid, aes(x = X * 1000, y = Y * 1000, fill = delta_prop)) +
  scale_fill_gradient2() +
  ggtitle(expression(delta ~ "proportion flounder")) +
  theme_plot()
# Plot the delta_proportion flounder vs the spatial trend of cod_condition
# (join the condition model's zeta_s onto the delta grid by cell id)
all_pred_delta_grid <- all_pred_delta_grid %>% mutate(id = paste(X, Y, sep = "."))
#> mutate: no changes
predcondition_1999 <- predcondition %>% mutate(id = paste(X, Y, sep = ".")) %>% filter(year == 1999)
#> mutate: new variable 'id' (character) with 9,281 unique values and 0% NA
#> filter: removed 241,306 rows (96%), 9,281 rows remaining
all_pred_delta_grid_w_cond <- left_join(all_pred_delta_grid,
dplyr::select(predcondition_1999, id, zeta_s))
#> Joining, by = "id"
#> left_join: added one column (zeta_s)
#>            > rows only in x    0
#>            > rows only in y  ( 0)
#>            > matched rows     9,281
#>            >                 =======
#>            > rows total       9,281
# Scatter: change in flounder proportion vs condition spatial trend
ggplot(all_pred_delta_grid_w_cond,
aes(delta_prop, zeta_s)) +
geom_point()
# Same but averaged by ices rect
# (join ices_rect from the prediction grid onto the delta grid via cell id)
all_pred_delta_grid_w_cond <- all_pred_delta_grid_w_cond %>% mutate(id = paste(X, Y, sep = "."))
#> mutate: no changes
pred_grid2 <- pred_grid2 %>% mutate(id = paste(X, Y, sep = "."))
#> mutate: new variable 'id' (character) with 9,281 unique values and 0% NA
all_pred_delta_grid_w_cond <- left_join(all_pred_delta_grid_w_cond, dplyr::select(pred_grid2, id, ices_rect))
#> Joining, by = "id"
#> left_join: added one column (ices_rect)
#>            > rows only in x        0
#>            > rows only in y      ( 0)
#>            > matched rows      250,587    (includes duplicates)
#>            >                  =========
#>            > rows total        250,587
# Rectangle-level means of zeta_s and delta_prop, plus a loess smoother
all_pred_delta_grid_w_cond %>%
group_by(ices_rect) %>%
summarise(mean_zeta_s = mean(zeta_s),
mean_delta_prop = mean(delta_prop)) %>%
ungroup() %>%
ggplot(., aes(mean_delta_prop, mean_zeta_s)) +
stat_smooth() +
geom_point()
#> group_by: one grouping variable (ices_rect)
#> summarise: now 61 rows and 3 columns, ungrouped
#> ungroup: no grouping variables
#> `geom_smooth()` using method = 'loess' and formula 'y ~ x'
# Plot average over time by rectangle (centered within rectangle):
# one line per ICES rectangle plus an overall GAM smoother
all_pred %>%
group_by(year, ices_rect) %>%
summarise(mean_prop = mean(fle_cod_prop),
sd_prop = sd(fle_cod_prop)) %>%
ungroup() %>%
group_by(ices_rect) %>%
mutate(mean_prop_ct = mean_prop - mean(mean_prop)) %>% # center within rectangle
ungroup() %>%
ggplot(., aes(year, mean_prop_ct, color = ices_rect)) +
geom_line(alpha = 0.6) +
stat_smooth(aes(year, mean_prop_ct), method = "gam", formula = y ~ s(x, k = 3),
inherit.aes = FALSE, size = 2, color = "gray20") +
scale_color_viridis(discrete = TRUE, name = "") +
coord_cartesian(expand = 0) +
# theme(legend.position = c(0.13, 0.82),
#       legend.text = element_text(size = 6)) +
theme(legend.position = "bottom",
legend.text = element_text(size = 6)) +
guides(color = guide_legend(ncol = 17)) +
NULL
#> group_by: 2 grouping variables (year, ices_rect)
#> summarise: now 1,647 rows and 4 columns, one group variable remaining (year)
#> ungroup: no grouping variables
#> group_by: one grouping variable (ices_rect)
#> mutate (grouped): new variable 'mean_prop_ct' (double) with 1,647 unique values and 0% NA
#> ungroup: no grouping variables
# Plot the distributions of "delta" change by each rectangle:
# rectangle-level mean proportions for 1993 and 2019, joined and differenced
all_pred_93 <- all_pred %>%
group_by(year, ices_rect) %>%
summarise(mean_prop_93 = mean(fle_cod_prop)) %>%
ungroup() %>%
filter(year == 1993)
#> group_by: 2 grouping variables (year, ices_rect)
#> summarise: now 1,647 rows and 3 columns, one group variable remaining (year)
#> ungroup: no grouping variables
#> filter: removed 1,586 rows (96%), 61 rows remaining
all_pred_19 <- all_pred %>%
group_by(year, ices_rect) %>%
summarise(mean_prop19 = mean(fle_cod_prop)) %>%
ungroup() %>%
filter(year == 2019)
#> group_by: 2 grouping variables (year, ices_rect)
#> summarise: now 1,647 rows and 3 columns, one group variable remaining (year)
#> ungroup: no grouping variables
#> filter: removed 1,586 rows (96%), 61 rows remaining
all_pred_delta <- left_join(all_pred_93, dplyr::select(all_pred_19, -year)) %>%
mutate(delta_prop = mean_prop19 - mean_prop_93)
#> Joining, by = "ices_rect"
#> left_join: added one column (mean_prop19)
#>            > rows only in x   0
#>            > rows only in y  (0)
#>            > matched rows    61
#>            >                ====
#>            > rows total      61
#> mutate: new variable 'delta_prop' (double) with 61 unique values and 0% NA
# Histogram of the per-rectangle changes; dashed red line marks no change
all_pred_delta %>%
ggplot(., aes(delta_prop, fill = ices_rect)) +
geom_histogram(alpha = 0.6) +
geom_vline(xintercept = 0, linetype = 2, size = 1.5, color = "tomato") +
scale_fill_viridis(discrete = TRUE, name = "") +
coord_cartesian(expand = 0) +
theme(legend.position = "bottom",
legend.text = element_text(size = 6)) +
guides(fill = guide_legend(ncol = 17)) +
NULL
#> `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
# Plot average over time: yearly mean flounder proportion with a +/- 1 sd band.
# BUG FIX: the ribbon used `mean_prop - mean_prop` / `mean_prop + mean_prop`
# (lower bound identically 0) while the computed sd_prop went unused; the band
# now uses sd_prop, which the summarise() clearly computed for this purpose.
all_pred %>%
  group_by(year) %>%
  summarise(mean_prop = mean(fle_cod_prop),
            sd_prop = sd(fle_cod_prop)) %>%
  ggplot(., aes(year, mean_prop)) +
  geom_ribbon(aes(year, y = mean_prop, ymin = mean_prop - sd_prop, ymax = mean_prop + sd_prop),
              fill = "grey70") +
  geom_line() +
  coord_cartesian(expand = 0)
#> group_by: one grouping variable (year)
#> summarise: now 27 rows and 3 columns, ungrouped
# See if I can mimic a carrying capacity in density: total predicted biomass
# per rectangle and year, then centered within rectangle.
# BUG FIX: the original summed exp(cod_est) + exp(cod_est) — cod counted
# twice and flounder omitted. Total biomass should combine both species
# (NOTE(review): confirm against intent; if only cod was wanted, the
# duplicated term was still redundant).
all_pred_rect_sum <- all_pred %>%
  group_by(year, ices_rect) %>%
  mutate(rect_sum_biom = sum(exp(cod_est) + exp(fle_est))) %>%
  ungroup() %>%
  group_by(ices_rect) %>%
  mutate(rect_sum_biom_ct = rect_sum_biom - mean(rect_sum_biom)) %>% # center within rectangle
  ungroup()
# Per-rectangle biomass time series (free y scales)
ggplot(all_pred_rect_sum, aes(year, rect_sum_biom/1000, color = ices_rect)) +
  geom_line(alpha = 1) +
  facet_wrap(~ices_rect, scales = "free") +
  guides(color = FALSE)
# Same, centered, all rectangles overlaid
ggplot(all_pred_rect_sum, aes(year, rect_sum_biom_ct, color = ices_rect)) +
  geom_line(alpha = 0.6) +
  guides(color = FALSE) +
  scale_color_viridis(discrete = TRUE)
# Identify why flounder are increasing their spatial range
# Density-weighted depth quantiles (1st, 5th/median, 9th decile) of the
# flounder distribution per year, using predicted density exp(est) as weights
wm_depth <- predfle %>%
group_by(year) %>%
summarise(#depth_wm = weighted.mean(depth, exp(est)), # This is the mean
"Density-weighted (5th decile, median)" = hutils::weighted_quantile(v = depth, w = exp(est), p = c(0.5)),
"Density-weighted (1st decile)" = hutils::weighted_quantile(v = depth, w = exp(est), p = c(0.1)),
"Density-weighted (9th decile)" = hutils::weighted_quantile(v = depth, w = exp(est), p = c(0.9))) %>%
pivot_longer(cols = c("Density-weighted (5th decile, median)", "Density-weighted (1st decile)", "Density-weighted (9th decile)"),
names_to = "series", values_to = "depth") %>%
group_by(series) %>% # standarize within for easy plotting
mutate(depth_ct = depth - mean(depth)) %>%
ungroup()
#> group_by: one grouping variable (year)
#> summarise: now 27 rows and 4 columns, ungrouped
#> pivot_longer: reorganized (Density-weighted (5th decile, median), Density-weighted (1st decile), Density-weighted (9th decile)) into (series, depth) [was 27x4, now 81x3]
#> group_by: one grouping variable (series)
#> mutate (grouped): new variable 'depth_ct' (double) with 60 unique values and 0% NA
#> ungroup: no grouping variables
# Time series of the weighted depth quantiles, one facet per series,
# with a GAM smoother over years
ggplot(wm_depth, aes(year, depth, color = series, group = series, fill = series)) +
stat_smooth(method = "gam", formula = y ~ s(x, k = 4), se = FALSE, size = 1) +
geom_point(size = 1.5, alpha = 0.8, color = "white", shape = 21) +
scale_x_continuous(breaks = scales::pretty_breaks(n = 10)) +
facet_wrap(~series) +
scale_color_viridis(discrete = TRUE) +
scale_fill_viridis(discrete = TRUE) +
guides(fill = FALSE, color = guide_legend(nrow = 4)) +
labs(y = "Depth [m]", x = "Year", color = "") +
theme_plot() +
theme(legend.position = "right") +
NULL
# Stop knitting here (knitr source document)
knitr::knit_exit()